SEBAL Model with Meteorological data¶

Sebal Model implementation with actual meteorological data and chosen Landsat 8 scenes from MesoHyd Project, Leipzig University.
Data provided by Christopher Hutengs.

In [ ]:
# Loading packages
import numpy as np
import pandas as pd
import geopandas as gpd
from shapely.geometry import box
from sklearn.linear_model import LinearRegression
import xarray as xr
import rioxarray as rxr
import rasterio as rio
from rasterio.enums import Resampling
from rasterio.merge import merge
from rioxarray.merge import merge_arrays
from rasterio.warp import reproject, Resampling, calculate_default_transform
from rasterio.plot import show
import matplotlib
import matplotlib.pyplot as plt
import math
import random

# Loading geocube to raterize vector polygons (https://github.com/corteva/geocube)
from geocube.api.core import make_geocube

import spyndex
import earthpy as et
import earthpy.spatial as es
import earthpy.plot as ep
import tarfile
import pathlib
#import geowombat




import glob
import os
import os.path
import sys
In [ ]:
# Define if plots should be produced: "yes" or "no"
plots = "yes"

# Choose method of automate anchor pixel selection: "A" or "B"
hot_cold_selection = "A"

# Choose method of r_ah / wind speed: "map" or "station" (to apply wind map or single value from "assumed weather station" and also calculate u200)
wind_method = "station"
In [ ]:
# Path Conversion Tool
pathlib.PureWindowsPath(r"D:\Nicolas_D\Geodaten\Masterarbeit\DATA_MesoHyd_MA-SEBAL\Processed\study_area").as_posix()
Out[ ]:
'D:/Nicolas_D/Geodaten/Masterarbeit/DATA_MesoHyd_MA-SEBAL/Processed/study_area'
In [ ]:
# Insert Start of data directory 
# path_data = "../../DATA_MesoHyd_MA-SEBAL/"   # Path data SC_UL
path_data = "D:/Nicolas_D/Geodaten/Masterarbeit/DATA_MesoHyd_MA-SEBAL/" # Path data NICOLAS-PC
In [ ]:
# ### FOR FASTER WORKING LOAD Landsat Rasterstack

# # Load Cliped and processed raster stack (NetCDF)
# LS_stack_clip = rxr.open_rasterio(path_data + "Processed/LANDSAT/LS_Processed/LS08_20150704.nc")

# # Print Properties with reloaded "new" LS_stack_clip (from NetCDF file)
# #print(LS_stack_clip)
# #print(LS_stack_clip.rio.crs)

# # Fix issues if lost CRS and lat/long dimensions of loaded LS_stack_clip from NetCDF
# #LS_stack_clip = LS_stack_clip.rename({"x": "long", "y": "lat"})
# #LS_stack_clip = LS_stack_clip.sortby(["band", "lat", "long"])
# LS_stack_clip.rio.write_crs("epsg:32632", inplace=True)
# LS_stack_clip = LS_stack_clip.squeeze()

# # Mask any values other than "Clear with low confidence on Clouds"
# LS_stack = LS_stack_clip.where(LS_stack_clip.QA_PIXEL.values == 21824)
In [ ]:
# Set date(s) of modelling/data retrieval
# date = "2015-07-04"  # Initial LS Picture
date = "2015-07-04"
date = pd.to_datetime(date, format = "%Y-%m-%d")
print("Date normal =", date)


# Date in meteo_raster_daily Path Format
date_met = date.date()
date_met = date_met.strftime('%Y-%m-%d')
print("Date INTERMET =", date_met)


# Date in WASIM-Raster Path Format
date_wasim = date.date()
date_wasim = date_wasim.strftime('%Y_%m_%d').replace("_0", "_").replace("_", "_")
print("Date WASIM =", date_wasim)


# Date in ETp-FAO56-Raster Path Format
date_etp = date.date()
date_etp = date_etp.strftime('%Y_%m_%d').replace("_0", "-").replace("_", "-")
print("Date ETp =", date_etp)


# Date SSEB Format
date_SSEB = date.date()
date_SSEB = date_SSEB.strftime('%Y_%m_%d').replace("-", "_")
print("Date SSEB =", date_SSEB)

# Date in Landsat Path Format
date_LS = date.date()
date_LS = date_LS.strftime('%Y%m%d')
print("Date Landsat =", date_LS)
Date normal = 2015-07-04 00:00:00
Date INTERMET = 2015-07-04
Date WASIM = 2015_7_4
Date ETp = 2015-7-4
Date SSEB = 2015_07_04
Date Landsat = 20150704

Import data/Test¶

Get help/tutorial from https://www.earthdatascience.org/courses/use-data-open-source-python/

Configure Data Access (Date, ...)¶

In [ ]:
# 0. Path for GeoTIFF
stack_name = "LS08_" + date_LS + ".tif"
stack_path = path_data + "Processed/LANDSAT/LS_Processed/" + stack_name

# Load Shapefile for Catchments
path_shapeA = path_data + "Processed/study_area/altenbamberg_catchment.gpkg"
shape_alten = gpd.read_file(path_shapeA)

path_shapeK = path_data + "Processed/study_area/kellenbach_catchment.gpkg"
shape_kellen = gpd.read_file(path_shapeK)

# Combine both Catchment Shapes
shape_catchments = pd.concat([shape_alten, shape_kellen])
In [ ]:
# # Load Cliped and processed Landsat raster stack (GeoTIFF), set CRS and apply Cloud Mask
# #def load_LS_stack():
# LS_stack_clip = rxr.open_rasterio(stack_path)
# LS_stack_clip.rio.write_crs("epsg:32632", inplace=True)
# LS_stack_clip = LS_stack_clip.squeeze()

# # Cloud Mask based on Landsat Quality Assessment Band, only keep "Clear with low confidence on Clouds"
# #LS_stack = LS_stack_clip.where(LS_stack_clip.QA_PIXEL.values == 21824) 
# print("GeoTIFF stack was loaded successfully")
In [ ]:
# Open Original Data in .tar archive
path1 = path_data + "Original/LANDSAT"
archive_pattern = f"*{date_LS}*.tar"
archive = sorted(glob.glob(os.path.join(path1, "**", archive_pattern), recursive=True))

# Create Directory for unpacked images
path2 =  path_data + "Processed/LANDSAT/" + "LS08_" + date_LS
path2
#path3 = data[45:-4] # Path for SC_UL
#path3 = data[74:-4]  # Path for NICOLAS-PC

#path_combined = os.path.join(path2, path3)
#path_combined
#os.mkdir(path_combined)

# Unpack .tar archive if Meta file already exists
file_test = glob.glob(path2 + "/*MTL.txt")

if not len(file_test) > 0:
    tar = tarfile.open(archive[0])
    tar.extractall(path = path2)
    tar.close()
    print("tar-archive with GeoTIFFS was unpacked")
else:
    print("tar-archive with GeoTIFFS was already unpacked")

# Open data files
data = sorted(glob.glob(path2 + "/*.TIF"))
data

###############################
# Open Landsat meta data
meta_path = sorted(glob.glob(path2 + "/*MTL.txt"))
meta = pd.read_csv(meta_path[0], sep = "=")

# Extract date and time from Landsat metadata
LS_date = meta[meta.iloc[:,0].str.contains("DATE_ACQUIRED")].iloc[0,1].strip() #.strip() to remove whitespace before string 
LS_time = meta[meta.iloc[:,0].str.contains("SCENE_CENTER_TIME")].iloc[0,1].strip().replace('"', '') 
LS_time = LS_time[0:8]
LS_date_time = LS_date + "-" + LS_time
LS_date_time = pd.to_datetime(LS_date_time, format = "%Y-%m-%d-%H:%M:%S")

###############################
# Extracting scale factors for Bands from Metafile
# Load and convert mutliplicative scale factor
scale_factor = meta[meta.iloc[:,0].str.contains("REFLECTANCE_MULT_BAND_1")].iloc[0,1]
scale_factor = float(scale_factor)
#print("Scale factor =", scale_factor)

# Load and convert additive offset
scale_add = meta[meta.iloc[:,0].str.contains("REFLECTANCE_ADD_BAND_1")].iloc[0,1]
scale_add = float(scale_add)
#print("Additive offset =", scale_add)

# open Metadatafile for extracting scale factors for THERMAL
# Load and convert mutliplicative scale factor for Band 10 (Surface Temp in Kelvin)
scale_factor_thermal = meta[meta.iloc[:,0].str.contains("TEMPERATURE_MULT_BAND_ST_B10")].iloc[0,1]
scale_factor_thermal = float(scale_factor_thermal)
#print("Scale factor Thermal =", scale_factor_thermal)

# Load and convert additive offset for Band 10 (Surface Temp in Kelvin)
scale_add_thermal = meta[meta.iloc[:,0].str.contains("TEMPERATURE_ADD_BAND_ST_B10")].iloc[0,1]
scale_add_thermal = float(scale_add_thermal)
#print("Additive offset Thermal =", scale_add_thermal)

###############################
# Load GeoTIFFs 
stack_dict = dict()

for i in range(2, 9):
    band_i = rxr.open_rasterio(data[i], masked = True).squeeze()
    band_corr = band_i*scale_factor+scale_add
    globals()[f"band_{i-1}"] = band_corr
    #print([f"band_{i-1}"])
    stack_dict[f"band_{i-1}"] = band_corr
    
###############################
# Add other needed bands to dictionary
# Band 10 Surface Temperature
stack_dict["band_10"] = rxr.open_rasterio(data[11], masked = True).squeeze()*scale_factor_thermal+scale_add_thermal

# Thermal Radiance TRAD
stack_dict["band_trad"] = rxr.open_rasterio(data[17], masked = True).squeeze()*0.001

# Emissivity estimated from ASTER_GED
stack_dict["emis"] = rxr.open_rasterio(data[14], masked = True).squeeze()*0.0001

# Level 2 Quality Pixel Band
stack_dict["QA_PIXEL"] = rxr.open_rasterio(data[0], masked = True).squeeze()

# Atmspheric Transmissivity Band
stack_dict["AtmoTrans"] = rxr.open_rasterio(data[10], masked = True).squeeze()*0.0001    

###############################
# Convert dictionaries xarrays into xr.Dataset
LS_stack = xr.Dataset(stack_dict)

###############################    
# Clip Landsat Raster with Shapefile
LS_stack_clip = LS_stack.rio.clip(shape_catchments.geometry, all_touched = True)

###############################  
# Safe cliped and processed raster stack to GeoTIFF Stack
#LS_stack_clip_float32 = LS_stack_clip.astype(np.float32)
#LS_stack_clip_float32.rio.to_raster(raster_path = (path_data + "Processed/LANDSAT/LS_Processed/" + stack_name))


###############################
# Cloud Mask based on Landsat Quality Assessment Band, only keep "Clear with low confidence on Clouds"
LS_stack = LS_stack_clip.where(LS_stack_clip.QA_PIXEL.values == 21824)

print("GeoTIFFs were loaded successfully")
tar-archive with GeoTIFFS was already unpacked
GeoTIFFs were loaded successfully

Import Landsat 8 data - VIS/NIR¶

Quick analysis of Land surface Temperature (Band 10)¶

In [ ]:
# T in Celsius

if plots == "yes": 
    band_10_cels = LS_stack.band_10 - 273.15    
    
    # Colormap 
    from  matplotlib.colors import LinearSegmentedColormap
    cmap=LinearSegmentedColormap.from_list('rg',["b", "w", "r"], N=256) 
    
    # Plot the data
    f, ax=plt.subplots()
    band_10_cels.plot.imshow(ax=ax,
                  cmap=cmap)
    ax.set_axis_off()
    ax.set_title("Plot of Band 10")
    plt.show()  
    
    # Print max/min T
    print("Min:", np.nanmin(LS_stack.band_10.values), "K")
    print("Max:", np.nanmax(LS_stack.band_10.values), "K")
    print("Min:", np.nanmin(band_10_cels.values), "°C")
    print("Max:", np.nanmax(band_10_cels.values), "°C")
Min: 289.88736 K
Max: 323.78387 K
Min: 16.737366 °C
Max: 50.63388 °C
In [ ]:
if plots == "yes": 
    LS_stack.band_10.plot.hist(bins = 100)
    plt.show()
    band_10_cels.plot.hist(bins = 100)
    plt.show()

Processing Steps¶

1. Preprocessing¶

1. Satellite Data: see above¶

2. Land Use Map¶

In [ ]:
# Open Corine Land Use Maps for both catchments
path_LUA = path_data + "Original/study_area/altenuse-ch.tif"
LU_alten = rxr.open_rasterio(path_LUA).squeeze()


path_LUK = path_data + "Original/study_area/kelleuse-ch.tif"
LU_kellen = rxr.open_rasterio(path_LUK).squeeze()

# Merge raster
LU_mosaic = merge_arrays(dataarrays = [LU_alten, LU_kellen], nodata = np.nan)
#LU_mosaic = LU_mosaic .rename({"x": "long", "y": "lat"})
LU_mosaic.rio.write_crs("epsg:32632", inplace=True)

# Reproject and Resample Land Use Raster

# Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
xds_match = LS_stack
xds = LU_mosaic
xds_repr_match = xds.rio.reproject_match(xds_match)

xds_repr_match = xds_repr_match.assign_coords({
    "x": xds_match.x,
    "y": xds_match.y,
})

LU_mosaic = xds_repr_match
In [ ]:
# Merge rasters 
if plots == "yes": 
    LU_mosaic.plot()
In [ ]:
# Combine with Land Cover Key Table
LU_table_path = "../data/clc_legend.csv"
LU_table = pd.read_csv(LU_table_path) #delimiter = "\t", header = [3]
print(LU_table)

LU_agri = [211, 212, 221, 222, 223, 241, 242, 243, 244]
LU_bare = [331, 332, 333, 334, 131]
LU_grass = [231, 321, 322, 323, 324, 411, 412]
LU_urban = [111, 112, 121, 122, 123, 124, 132, 133, 141, 142]
LU_forest = [311, 312, 313]
LU_water = [511, 512, 335]
    GRID_CODE  CLC_CODE                         LABEL1  \
0           1       111            Artificial surfaces   
1           2       112            Artificial surfaces   
2           3       121            Artificial surfaces   
3           4       122            Artificial surfaces   
4           5       123            Artificial surfaces   
5           6       124            Artificial surfaces   
6           7       131            Artificial surfaces   
7           8       132            Artificial surfaces   
8           9       133            Artificial surfaces   
9          10       141            Artificial surfaces   
10         11       142            Artificial surfaces   
11         12       211             Agricultural areas   
12         13       212             Agricultural areas   
13         14       213             Agricultural areas   
14         15       221             Agricultural areas   
15         16       222             Agricultural areas   
16         17       223             Agricultural areas   
17         18       231             Agricultural areas   
18         19       241             Agricultural areas   
19         20       242             Agricultural areas   
20         21       243             Agricultural areas   
21         22       244             Agricultural areas   
22         23       311  Forest and semi natural areas   
23         24       312  Forest and semi natural areas   
24         25       313  Forest and semi natural areas   
25         26       321  Forest and semi natural areas   
26         27       322  Forest and semi natural areas   
27         28       323  Forest and semi natural areas   
28         29       324  Forest and semi natural areas   
29         30       331  Forest and semi natural areas   
30         31       332  Forest and semi natural areas   
31         32       333  Forest and semi natural areas   
32         33       334  Forest and semi natural areas   
33         34       335  Forest and semi natural areas   
34         35       411                       Wetlands   
35         36       412                       Wetlands   
36         37       421                       Wetlands   
37         38       422                       Wetlands   
38         39       423                       Wetlands   
39         40       511                   Water bodies   
40         41       512                   Water bodies   
41         42       521                   Water bodies   
42         43       522                   Water bodies   
43         44       523                   Water bodies   

                                             LABEL2  \
0                                      Urban fabric   
1                                      Urban fabric   
2        Industrial, commercial and transport units   
3        Industrial, commercial and transport units   
4        Industrial, commercial and transport units   
5        Industrial, commercial and transport units   
6                 Mine, dump and construction sites   
7                 Mine, dump and construction sites   
8                 Mine, dump and construction sites   
9      Artificial, non-agricultural vegetated areas   
10     Artificial, non-agricultural vegetated areas   
11                                      Arable land   
12                                      Arable land   
13                                      Arable land   
14                                  Permanent crops   
15                                  Permanent crops   
16                                  Permanent crops   
17                                         Pastures   
18                 Heterogeneous agricultural areas   
19                 Heterogeneous agricultural areas   
20                 Heterogeneous agricultural areas   
21                 Heterogeneous agricultural areas   
22                                          Forests   
23                                          Forests   
24                                          Forests   
25  Scrub and/or herbaceous vegetation associations   
26  Scrub and/or herbaceous vegetation associations   
27  Scrub and/or herbaceous vegetation associations   
28  Scrub and/or herbaceous vegetation associations   
29         Open spaces with little or no vegetation   
30         Open spaces with little or no vegetation   
31         Open spaces with little or no vegetation   
32         Open spaces with little or no vegetation   
33         Open spaces with little or no vegetation   
34                                  Inland wetlands   
35                                  Inland wetlands   
36                                Maritime wetlands   
37                                Maritime wetlands   
38                                Maritime wetlands   
39                                    Inland waters   
40                                    Inland waters   
41                                    Marine waters   
42                                    Marine waters   
43                                    Marine waters   

                                               LABEL3          RGB  
0                             Continuous urban fabric  230-000-077  
1                          Discontinuous urban fabric  255-000-000  
2                      Industrial or commercial units  204-077-242  
3          Road and rail networks and associated land  204-000-000  
4                                          Port areas  230-204-204  
5                                            Airports  230-204-230  
6                            Mineral extraction sites  166-000-204  
7                                          Dump sites  166-077-000  
8                                  Construction sites  255-077-255  
9                                   Green urban areas  255-166-255  
10                       Sport and leisure facilities  255-230-255  
11                          Non-irrigated arable land  255-255-168  
12                         Permanently irrigated land  255-255-000  
13                                        Rice fields  230-230-000  
14                                          Vineyards  230-128-000  
15                  Fruit trees and berry plantations  242-166-077  
16                                       Olive groves  230-166-000  
17                                           Pastures  230-230-077  
18       Annual crops associated with permanent crops  255-230-166  
19                       Complex cultivation patterns  255-230-077  
20  Land principally occupied by agriculture, with...  230-204-077  
21                                Agro-forestry areas  242-204-166  
22                                Broad-leaved forest  128-255-000  
23                                  Coniferous forest  000-166-000  
24                                       Mixed forest  077-255-000  
25                                 Natural grasslands  204-242-077  
26                                Moors and heathland  166-255-128  
27                          Sclerophyllous vegetation  166-230-077  
28                        Transitional woodland-shrub  166-242-000  
29                              Beaches, dunes, sands  230-230-230  
30                                         Bare rocks  204-204-204  
31                           Sparsely vegetated areas  204-255-204  
32                                        Burnt areas  000-000-000  
33                        Glaciers and perpetual snow  166-230-204  
34                                     Inland marshes  166-166-255  
35                                          Peat bogs  077-077-255  
36                                       Salt marshes  204-204-255  
37                                            Salines  230-230-255  
38                                   Intertidal flats  166-166-230  
39                                      Water courses  000-204-242  
40                                       Water bodies  128-242-230  
41                                    Coastal lagoons  000-255-166  
42                                          Estuaries  166-255-230  
43                                      Sea and ocean  230-242-255  
In [ ]:
# Create filters from Land Use classes
agri_filter = np.isin(LU_mosaic.values, LU_agri)
bare_filter = np.isin(LU_mosaic.values, LU_bare)
grass_filter = np.isin(LU_mosaic.values, LU_grass)
urban_filter = np.isin(LU_mosaic.values, LU_urban)
forest_filter = np.isin(LU_mosaic.values, LU_forest)
water_filter = np.isin(LU_mosaic.values, LU_water)

3. Meteorolocial Data¶

3.1 Wind¶

Load and process wind data (Station data, one point per catchment, from WASIM Meteo Data)

In [ ]:
# Read Wind tables from meteo data
path_meteo: str = path_data + "Processed/meteo_original/"
path_wind_alten: str = path_meteo + "Wind2010_alten.txt"
path_wind_kellen: str = path_meteo + "Wind2010_kellen.txt"
wind_alten = pd.read_csv(path_wind_alten, delimiter = "\t", header = [3])
wind_kellen = pd.read_csv(path_wind_kellen, delimiter = "\t", header = [3])

# Change Hour to Start time of day at 00:00 midnight 
wind_alten.iloc[:,3] = wind_alten.iloc[:,3]-1 
wind_kellen.iloc[:,3] = wind_kellen.iloc[:,3]-1 

# Create DateTimeIndex based on Date columns
alten_wind_date_index = pd.to_datetime(wind_alten.iloc[:,0:4].astype(str).agg("-".join, axis = 1), format = "%Y-%m-%d-%H")
kellen_wind_date_index = pd.to_datetime(wind_kellen.iloc[:,0:4].astype(str).agg("-".join, axis = 1), format = "%Y-%m-%d-%H")

# Built final wind dataframe
wind_alten_df = wind_alten.iloc[:,4].to_frame()
wind_kellen_df = wind_kellen.iloc[:,4].to_frame()

# Add Date Index
wind_alten_df = wind_alten_df.set_index([alten_wind_date_index])
wind_kellen_df = wind_kellen_df.set_index([kellen_wind_date_index])

# Outer Join for both data frames Altenbamberg and Kellenbach
wind_df = pd.concat([wind_alten_df, wind_kellen_df], axis = 1)
wind_df.columns = ["wind_altenbamberg", "wind_kellenbach"]
wind_df.index.name = "date"

# Extracting Wind Speed for respective Date and add to map per Catchment
wind_day = pd.Series([wind_df.loc[date][1], wind_df.loc[date][0]], index = [0,0])
wind_vec = shape_catchments.assign(wind = wind_day)

 
    
#################################################    
# Rasterize vector polygon with geocube
out_grid = make_geocube(
    vector_data = wind_vec,
    output_crs = LS_stack.rio.crs,
    resolution = (-30, 30)
)

# Resample raster big to same resolution (30x30m)
# Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
xds_match = LS_stack
xds = out_grid
xds_repr_match = xds.rio.reproject_match(xds_match)

xds_repr_match = xds_repr_match.assign_coords({
    "x": xds_match.x,
    "y": xds_match.y,
})

wind_map = xds_repr_match.wind

if plots == "yes": 
    wind_map.plot()    

# Export to csv
# wind_df.to_csv("D:/Nicolas_D/Geodaten/Masterarbeit/DATA_MesoHyd_MA-SEBAL/Processed/meteo_processed/wind_velocity.csv")
C:\Users\nicol\AppData\Local\Temp\ipykernel_21996\4018496343.py:30: FutureWarning: Series.__getitem__ treating keys as positions is deprecated. In a future version, integer keys will always be treated as labels (consistent with DataFrame behavior). To access a value by position, use `ser.iloc[pos]`
  wind_day = pd.Series([wind_df.loc[date][1], wind_df.loc[date][0]], index = [0,0])

2. Surface Radiation Balance¶

2.1 NDVI, SAVI, LAI¶

In [ ]:
# NDVI
# NDVI = (band_5 - band_4) / (band_5 + band_4)

NDVI = spyndex.computeIndex(
    index = "NDVI",
    params = {
        "N": LS_stack.band_5,
        "R": LS_stack.band_4
    }
)

if plots == "yes": 
    NDVI.plot(cmap = "Greens")
In [ ]:
# SAVI
# SAVI = (1+L_savi)*(band_5 - band_4) / (L_savi+band_5 + band_4)
L_savi = 0.5       # Value should be adjusted according to Soil Moisture (0.1 and 0.5 are suggested)

SAVI = spyndex.computeIndex(
    index = "SAVI",
    params = {
        "L": L_savi,
        "N": LS_stack.band_5,
        "R": LS_stack.band_4
    }
)

LAI¶

Calcualtion of LAI based on function from Olmedo et al. 2016
"water: Tools and Functions to Estimate Actual Evapotranspiration Using Land Surface Energy Balance Models in R" https://journal.r-project.org/archive/2016/RJ-2016-051/index.html

In [ ]:
# Function of LAI 

def LAI_func(method):
    if method == "metric":
        LAI = -(np.log((0.69-SAVI)/0.59)/0.91) 
    elif method == "metric2010":
        LAI = 11*(SAVI**3)
    elif method == "vineyard":
        LAI = 4.9*NDVI-0.46
    elif method == "MCB":
        LAI = 1.2 - 3.08 * np.exp(-2013.35 * NDVI**6.41)
    elif method == "turner":
        LAI = 0.5724 + 0.0989 * NDVI - 0.0114 * NDVI**2 + 0.0004 * NDVI**3
    return(LAI)
In [ ]:
# Calculate LAI with mean of two Formulas after Jaafar_2020
LAI = (LAI_func("metric") + LAI_func("metric2010")) / 2 

if plots == "yes": 
    LAI.plot(cmap = "Greens")
    
if plots == "yes":     
    LAI.plot.hist(bins = 50)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)

2.2 Albedo¶

Calcualtion of Albedo based on LS 8 BOA Reflectances, based on Olmedo et al. 2016
"water: Tools and Functions to Estimate Actual Evapotranspiration Using Land Surface Energy Balance Models in R" https://journal.r-project.org/archive/2016/RJ-2016-051/index.html

In [ ]:
# Create band List for Albedo function

def create_band_list(n_bands=7):
  band_list = []
  for i in range(2, n_bands + 1):
    band_name = "band_" + str(i)
    band_list.append(band_name)
  return band_list


# Create a list with names `band_1` to `band_7`
band_list = create_band_list(n_bands=7)

# Function for Albedo with three different coefficient options
def albedo(LS_image, coeff, band_list):
    if coeff == "Tasumi":
        wb = np.array([0.254, 0.149, 0.147, 0.311, 0.103, 0.036])
    elif coeff == "Olmedo":
        wb = np.array([0.246, 0.146, 0.191, 0.304, 0.105, 0.008])
    elif coeff == "Liang":
        wb = np.array([0.356, 0, 0.13, 0.373, 0.085, 0.072])

    # Calculate the albedo for each band.
    albedo = LS_stack[band_list[0]] * wb[0]
    for i in range(1, 6):
            albedo += LS_image[band_list[i]] * wb[i]
            
    return(albedo)
In [ ]:
# Calculate Albedo with default coefficients from "Olmedo"
albedo_unfilt = albedo(LS_stack, "Olmedo", band_list)
albedo = albedo_unfilt.where((albedo_unfilt >= 0) & (albedo_unfilt <= 1))

if plots == "yes": 
    albedo.plot()

2.3 Rs - Incoming Shortwave Radiation¶

2.3.1 InterMet bigger raster, hourly¶

Use of Spatial, hourly Radiation Data from INTERMET (choose time closest to Landsat overpass time)
NOTE: Maybe need to correct time (+1h) for meteorological data, because Landsat Aquisition time is GMT --> time zone for InterMet data is yet unknown(?)

In [ ]:
# DATA FROM InterMet/[specific date] --> hourly rasters of radiation

# Date in INTERMET-Raster Path Format with time (hour)
hour = LS_date_time.round("H").strftime("%H")
date_InterMet = date_LS + hour

# Create Path for recursive file search based on defined date
path_intermet = path_data + "Original/InterMet/"
pattern = f"*rad*{date_InterMet}*.tif"
paths_intermet_rad = sorted(glob.glob(os.path.join(path_intermet, "**", pattern), recursive=True))
print(paths_intermet_rad)

# Open INTERMET hourly Radiation Raster (biger area, catchments have to be clipped)
intermet_rad_big = rxr.open_rasterio(paths_intermet_rad[0]).squeeze()
#intermet_rad_big = intermet_rad_big.where(intermet_rad_big > 0)
#intermet_rad_alten = intermet_rad_alten.rename({"x": "long", "y": "lat"})
#intermet_rad_alten = intermet_rad_alten.sortby(["lat", "long"])
intermet_rad_big.rio.write_crs("epsg:31466", inplace=True)
intermet_rad_big.rio.write_nodata(intermet_rad_big.rio.nodata, encoded=True, inplace=True)
['D:/Nicolas_D/Geodaten/Masterarbeit/DATA_MesoHyd_MA-SEBAL/Original/InterMet\\2015-07-04\\Intermet_hourly_rad_2015070410.tif']
Out[ ]:
<xarray.DataArray (y: 255, x: 233)>
[59415 values with dtype=float32]
Coordinates:
    band         int32 1
  * x            (x) float64 2.502e+06 2.503e+06 ... 2.733e+06 2.734e+06
  * y            (y) float64 5.669e+06 5.668e+06 ... 5.416e+06 5.415e+06
    spatial_ref  int32 0
Attributes:
    AREA_OR_POINT:       Area
    STATISTICS_MAXIMUM:  701
    STATISTICS_MEAN:     634.53444297147
    STATISTICS_MINIMUM:  415
    STATISTICS_STDDEV:   27.747302132261
    scale_factor:        1.0
    add_offset:          0.0
xarray.DataArray
  • y: 255
  • x: 233
  • ...
    [59415 values with dtype=float32]
    • band
      ()
      int32
      1
      array(1)
    • x
      (x)
      float64
      2.502e+06 2.503e+06 ... 2.734e+06
      array([2502231., 2503231., 2504231., ..., 2732231., 2733231., 2734231.])
    • y
      (y)
      float64
      5.669e+06 5.668e+06 ... 5.415e+06
      array([5669068., 5668068., 5667068., ..., 5417068., 5416068., 5415068.])
    • spatial_ref
      ()
      int32
      0
      crs_wkt :
      PROJCS["DHDN / 3-degree Gauss-Kruger zone 2",GEOGCS["DHDN",DATUM["Deutsches_Hauptdreiecksnetz",SPHEROID["Bessel 1841",6377397.155,299.1528128,AUTHORITY["EPSG","7004"]],AUTHORITY["EPSG","6314"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4314"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",6],PARAMETER["scale_factor",1],PARAMETER["false_easting",2500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Northing",NORTH],AXIS["Easting",EAST],AUTHORITY["EPSG","31466"]]
      semi_major_axis :
      6377397.155
      semi_minor_axis :
      6356078.962818189
      inverse_flattening :
      299.1528128
      reference_ellipsoid_name :
      Bessel 1841
      longitude_of_prime_meridian :
      0.0
      prime_meridian_name :
      Greenwich
      geographic_crs_name :
      DHDN
      horizontal_datum_name :
      Deutsches Hauptdreiecksnetz
      projected_crs_name :
      DHDN / 3-degree Gauss-Kruger zone 2
      grid_mapping_name :
      transverse_mercator
      latitude_of_projection_origin :
      0.0
      longitude_of_central_meridian :
      6.0
      false_easting :
      2500000.0
      false_northing :
      0.0
      scale_factor_at_central_meridian :
      1.0
      spatial_ref :
      PROJCS["DHDN / 3-degree Gauss-Kruger zone 2",GEOGCS["DHDN",DATUM["Deutsches_Hauptdreiecksnetz",SPHEROID["Bessel 1841",6377397.155,299.1528128,AUTHORITY["EPSG","7004"]],AUTHORITY["EPSG","6314"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4314"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",6],PARAMETER["scale_factor",1],PARAMETER["false_easting",2500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Northing",NORTH],AXIS["Easting",EAST],AUTHORITY["EPSG","31466"]]
      GeoTransform :
      2501731.0 1000.0 0.0 5669568.0 0.0 -1000.0
      array(0)
    • x
      PandasIndex
      PandasIndex(Index([2502231.0, 2503231.0, 2504231.0, 2505231.0, 2506231.0, 2507231.0,
             2508231.0, 2509231.0, 2510231.0, 2511231.0,
             ...
             2725231.0, 2726231.0, 2727231.0, 2728231.0, 2729231.0, 2730231.0,
             2731231.0, 2732231.0, 2733231.0, 2734231.0],
            dtype='float64', name='x', length=233))
    • y
      PandasIndex
      PandasIndex(Index([5669068.0, 5668068.0, 5667068.0, 5666068.0, 5665068.0, 5664068.0,
             5663068.0, 5662068.0, 5661068.0, 5660068.0,
             ...
             5424068.0, 5423068.0, 5422068.0, 5421068.0, 5420068.0, 5419068.0,
             5418068.0, 5417068.0, 5416068.0, 5415068.0],
            dtype='float64', name='y', length=255))
  • AREA_OR_POINT :
    Area
    STATISTICS_MAXIMUM :
    701
    STATISTICS_MEAN :
    634.53444297147
    STATISTICS_MINIMUM :
    415
    STATISTICS_STDDEV :
    27.747302132261
    scale_factor :
    1.0
    add_offset :
    0.0
In [ ]:
# Reproject intermet raster big to same CRS
intermet_rad_big = intermet_rad_big.rio.reproject(LS_stack.rio.crs)
intermet_rad_big = intermet_rad_big.where(intermet_rad_big > 0)
In [ ]:
# Plot Catchments within Raster

if plots == "yes": 
    f, ax = plt.subplots()
    intermet_rad_big.plot.imshow(ax=ax)

    shape_catchments.plot(ax=ax,
                        facecolor="none", 
                        linewidth=0.4)
    ax.set(title="Raster Layer with Shapefile Overlayed")


    ax.set_axis_off()
    plt.show()
In [ ]:
# Clip raster by catchment vectors
intermet_rad_clip = intermet_rad_big.rio.clip(shape_catchments.geometry, all_touched = True)

# Resample rad raster big to same resolution (30x30m)
# Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
xds_match = LS_stack
xds = intermet_rad_clip
xds_repr_match = xds.rio.reproject_match(xds_match)

xds_repr_match = xds_repr_match.assign_coords({
    "x": xds_match.x,
    "y": xds_match.y,
})

intermet_rad_clip = xds_repr_match
RS_in = intermet_rad_clip

if plots == "yes": 
    intermet_rad_clip.plot()

2.4 RL - Outgoing Longwave Radiation¶

Surface Emissivity (C, 2.), Thermal Radiance (C, 3.), Surface Temperature (C, 4.) are not needed at the moment because we already got Surface Temperature from Landsat Level 2

In [ ]:
if plots == "yes": 
    print("Max LAI:", np.nanmax(LAI.values))
Max LAI: 7.3279047
In [ ]:
emis_null = xr.where((LAI < 3) | LAI.isnull(), 0.95 + 0.01*LAI, 0.98)
In [ ]:
if plots == "yes": 
    # Plot the data
    f, ax=plt.subplots()
    emis_null.plot.imshow(ax=ax,
                    cmap = "Greys")
    ax.set_axis_off()
    ax.set_title("Plot of Emissivity_0")
    plt.show()
In [ ]:
# Outgoing Longwave Radiation
# typical Range: 200-700 W/m²
Ts = LS_stack.band_10    # Surface Temperature in Kelvin 
sigma = 5.67*10**-8     # Boltzmann constant in W/m²/K**4

RL_out = emis_null * sigma * Ts**4     # With emis_null from SEBAL_manual formulas

# Plot the data
if plots == "yes": 
    f, ax=plt.subplots()
    RL_out.plot.imshow(ax=ax,
                    cmap = "Reds")
    ax.set_axis_off()
    ax.set_title("Plot of RL - Outgoing Longwave Radiation - emis_null")
    plt.show()

2.5 Choosing Hot and Cold Pixel¶

In [ ]:
# zom assigned to Land Use classes after SEBAL_Diss_knoeffel
LU_list = [LU_agri, LU_bare, LU_grass, LU_urban, LU_forest, LU_water]
zom_list = [0.018 * LAI, 0.004, 0.02, 0.2, 0.5, 0.001]

zom_map = np.nan  # Default value if none of the conditions match


for i in range(0, len(LU_list)):
    zom_map = xr.where(LU_mosaic.isin(LU_list[i]), zom_list[i], zom_map)
    
# Apply Cloud Filter also to zom_map
zom_map = zom_map.where(LS_stack_clip.QA_PIXEL.values == 21824)    
    
if plots == "yes":
    zom_map.plot()
In [ ]:
# Selecting Cold/Wet Pixel
cold_px_map = LS_stack.where(agri_filter)
if plots == "yes":     
    cold_px_map.band_10.plot()

2.5. A) After SEBAL_Dissertation_knoeffel_2018 and SEBAL_manual¶

In [ ]:
## METHOD AFTER SEBAL_Dissertation_Knoeffel_2018 and SEBAL_manual

if hot_cold_selection == "A": 
    
    LAI_filt = LAI.where(agri_filter)
    filt_cold = LS_stack.band_10.where(LAI_filt > 3)
    filt_hot = LS_stack.band_10.where(LAI_filt < 1)

    # Select cold/wet pixel
    T_cold = filt_cold.where(filt_cold==filt_cold.min(), drop=True).squeeze().values
    cold_x = filt_cold.where(filt_cold==filt_cold.min(), drop=True).squeeze().coords["x"].values
    cold_y = filt_cold.where(filt_cold==filt_cold.min(), drop=True).squeeze().coords["y"].values
    LU_cold = LU_mosaic.where(filt_cold==filt_cold.min(), drop=True).squeeze()
    zom_cold = zom_map.sel(x = cold_x, y = cold_y)

    print("T_cold:", T_cold)
    print("Land use:", LU_cold.values, "// zom_cold:", zom_cold.values)
    print("Overall minimum Ts:", np.nanmin(LS_stack.band_10.values))



    # Selecting Hot/Dry Pixel
    T_hot = filt_hot.where(filt_hot==filt_hot.max(), drop=True).squeeze().values
    hot_x = filt_hot.where(filt_hot==filt_hot.max(), drop=True).squeeze().coords["x"].values
    hot_y = filt_hot.where(filt_hot==filt_hot.max(), drop=True).squeeze().coords["y"].values
    LU_hot = LU_mosaic.where(filt_hot==filt_hot.max(), drop=True).squeeze()
    zom_hot = zom_map.sel(x = hot_x, y = hot_y)


    print("T_hot:", T_hot)
    print("Land use:", LU_hot.values, "// zom_hot:", zom_hot.values)
    print("Overall maximum Ts:", np.nanmax(LS_stack.band_10.values))


    if plots == "yes":  
        # Plot the data: Hot and Cold Pixel
        f, ax=plt.subplots()
        LS_stack.band_10.plot()
        plt.scatter(cold_x, cold_y, color='blue', s=5)
        plt.scatter(hot_x, hot_y, color='red', s=5)
        ax.set_axis_off()
        ax.set_title("")
        plt.show()
T_cold: 293.61642
Land use: 211.0 // zom_cold: 0.06052375
Overall minimum Ts: 289.88736
T_hot: 317.39557
Land use: 211.0 // zom_hot: 0.0016448442
Overall maximum Ts: 323.78387

Plot Hot Pixel¶

In [ ]:
hot = LS_stack.sel(x=slice(hot_x - 1500, hot_x + 1500), y=slice(hot_y + 1500, hot_y - 1500))
hot.band_10.plot()
plt.scatter(hot_x, hot_y, color='red', s=5)
Out[ ]:
<matplotlib.collections.PathCollection at 0x25d994e99d0>
In [ ]:
# Plot array
rgb = np.dstack((hot.band_4,
                    hot.band_3,
                    hot.band_2))


# Visualize RGB
plt.imshow(rgb)
Out[ ]:
<matplotlib.image.AxesImage at 0x25e0d988f50>
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\matplotlib\cm.py:494: RuntimeWarning: invalid value encountered in cast
  xx = (xx * 255).astype(np.uint8)
In [ ]:
hotNDVI = NDVI.sel(x=slice(hot_x - 1500, hot_x + 1500), y=slice(hot_y + 1500, hot_y - 1500))
hotNDVI.plot()
plt.scatter(hot_x, hot_y, color='red', s=5)
Out[ ]:
<matplotlib.collections.PathCollection at 0x25d994b5c10>

Plot Cold Pixel¶

In [ ]:
cold = LS_stack.sel(x=slice(cold_x - 1500, cold_x + 1500), y=slice(cold_y + 1500, cold_y - 1500))
cold.band_10.plot()
plt.scatter(cold_x, cold_y, color='blue', s=5)
Out[ ]:
<matplotlib.collections.PathCollection at 0x25e0dadce10>
In [ ]:
# Plot array
rgb = np.dstack((cold.band_4,
                    cold.band_3,
                    cold.band_2))


# Visualize RGB
plt.imshow(rgb)
Out[ ]:
<matplotlib.image.AxesImage at 0x25e0f06b610>
In [ ]:
coldNDVI = NDVI.sel(x=slice(cold_x - 1500, cold_x + 1500), y=slice(cold_y + 1500, cold_y - 1500))
coldNDVI.plot()
plt.scatter(cold_x, cold_y, color="blue", s=5)
Out[ ]:
<matplotlib.collections.PathCollection at 0x25e0f10ff50>

2.5 B) After Allen 2013 / Olmedo 2016 / Jaafar 2020¶

(Combination of all approaches)

In [ ]:
# Select cold/wet pixel

if hot_cold_selection == "B": 
    
    NDVI_filt = NDVI.where(agri_filter) 
    filt_cold1 = LS_stack.band_10.where(NDVI_filt.values > np.nanpercentile(NDVI_filt, 95))  # Upper 5 % NDVI
    filt_cold2 = filt_cold1.where(filt_cold1 < np.nanpercentile(filt_cold1.values, 20))  # Lowest 20 % Ts 
    filt_cold3 = filt_cold2.where((filt_cold2 >= filt_cold2.mean(skipna = True).values - 0.2) & (filt_cold2 <= filt_cold2.mean(skipna = True).values + 0.2))
    filt_cold4 = filt_cold3.where((albedo >= 0.18) & (albedo <= 0.25))



    T_cold_target = filt_cold4.median()

    ####
    # Find Cold Pixel nearest to T_cold
    # https://www.geeksforgeeks.org/find-the-nearest-value-and-the-index-of-numpy-array/

    # calculate the difference array
    arr = filt_cold4
    difference_array = np.absolute(arr-T_cold_target)
    T_cold_coords = difference_array.min()
    cold_x = arr.where(difference_array == difference_array.min(), drop = True).squeeze().coords["x"].values
    cold_y = arr.where(difference_array == difference_array.min(), drop = True).squeeze().coords["y"].values

    # Pixel
    T_cold = filt_cold4.sel(x = cold_x, y = cold_y).values
    LU_cold = LU_mosaic.sel(x = cold_x, y = cold_y)
    zom_cold = zom_map.sel(x = cold_x, y = cold_y)

    print("T_cold_targed:", T_cold_target.values, "// T_cold:", T_cold,)
    print("Difference:", difference_array.min().values, "// Land use:", LU_cold.values)
    print("zom_cold:", zom_cold.values)
    print("Overall minimum Ts:", np.nanmin(LS_stack.band_10.values))
In [ ]:
# Select hot/dry pixel

if hot_cold_selection == "B": 
    
    NDVI_filt = NDVI.where(agri_filter) 
    filt_hot1 = LS_stack.band_10.where(NDVI_filt.values < np.nanpercentile(NDVI_filt, 10))  # Lower 10 % NDVI
    filt_hot2 = filt_hot1.where(filt_hot1 > np.nanpercentile(filt_hot1.values, 80))  # Lowest 20 % Ts 
    filt_hot3 = filt_hot2.where((filt_hot2 >= filt_hot2.mean(skipna = True).values - 0.2) & (filt_hot2 <= filt_hot2.mean(skipna = True).values + 0.2))

    T_hot_target = filt_hot3.median()

    ####
    # Find Cold Pixel nearest to T_cold
    # https://www.geeksforgeeks.org/find-the-nearest-value-and-the-index-of-numpy-array/

    # calculate the difference array and pixel with minimum difference to T_hot_target 
    arr = filt_hot3
    difference_array = np.absolute(arr-T_hot_target)
    T_hot_coords = difference_array.min()
    # Hot Pixel candidates
    hot_x_cand = arr.where(difference_array == difference_array.min(), drop = True).squeeze().coords["x"].values 
    hot_y_cand = arr.where(difference_array == difference_array.min(), drop = True).squeeze().coords["y"].values

    # Pick first of all possible hot pixels candidates (could also be random)
    hot_x = hot_x_cand[0]
    hot_y = hot_y_cand[0]

    # Hot Pixel
    T_hot = filt_hot3.sel(x = hot_x, y = hot_y).values
    LU_hot = LU_mosaic.sel(x = hot_x, y = hot_y)
    zom_hot = zom_map.sel(x = hot_x, y = hot_y)

    print("T_hot_target:", T_hot_target.values, "// Count of hot pixel candidates:", hot_x_cand.size)
    print("T_hot:", T_hot)
    print("Difference:", difference_array.min().values, "// Land use:", LU_hot.values)
    print("zom_hot:", zom_hot.values)
    print("Overall maximum Ts:", np.nanmax(LS_stack.band_10.values))
In [ ]:
if hot_cold_selection == "B":
    
    if plots == "yes":  
        # Plot the data: Hot and Cold Pixel
        f, ax=plt.subplots()
        LS_stack.band_10.plot()
        plt.scatter(cold_x, cold_y, color='blue', s=5)
        plt.scatter(hot_x, hot_y, color='red', s=5)
        ax.set_axis_off()
        ax.set_title("")
        plt.show()

2.6 RL - Incoming Longwave Radiation¶

2.6.1 Load DEM¶

In [ ]:
# Load DEM for RL_in
path_DEM= path_data + "Original/DEM/DEM_RLP_subset_30m.tif"


DEM_raw = rxr.open_rasterio(path_DEM).squeeze()
DEM = DEM_raw.isel(band=0)

# Merge raster
DEM.rio.write_crs("epsg:32632", inplace=True)

# # Reproject and Resample Land Use Raster

# # Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
# xds_match = LS_stack
# xds = DEM_30
# xds_repr_match = xds.rio.reproject_match(xds_match)

# xds_repr_match = xds_repr_match.assign_coords({
#     "x": xds_match.x,
#     "y": xds_match.y,
# })

# DEM_30 = xds_repr_match
Out[ ]:
<xarray.DataArray (y: 2701, x: 2372)>
[6406772 values with dtype=float64]
Coordinates:
    band         int32 1
  * x            (x) float64 3.586e+05 3.586e+05 ... 4.297e+05 4.297e+05
  * y            (y) float64 5.556e+06 5.556e+06 ... 5.475e+06 5.475e+06
    spatial_ref  int32 0
Attributes:
    AREA_OR_POINT:     Area
    TIFFTAG_SOFTWARE:  GRASS GIS 7.8.5 with GDAL 3.1.4
    _FillValue:        nan
    scale_factor:      1.0
    add_offset:        0.0
xarray.DataArray
  • y: 2701
  • x: 2372
  • ...
    [6406772 values with dtype=float64]
    • band
      ()
      int32
      1
      array(1)
    • x
      (x)
      float64
      3.586e+05 3.586e+05 ... 4.297e+05
      array([358560., 358590., 358620., ..., 429630., 429660., 429690.])
    • y
      (y)
      float64
      5.556e+06 5.556e+06 ... 5.475e+06
      array([5556060., 5556030., 5556000., ..., 5475120., 5475090., 5475060.])
    • spatial_ref
      ()
      int32
      0
      crs_wkt :
      PROJCS["WGS 84 / UTM zone 32N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32632"]]
      semi_major_axis :
      6378137.0
      semi_minor_axis :
      6356752.314245179
      inverse_flattening :
      298.257223563
      reference_ellipsoid_name :
      WGS 84
      longitude_of_prime_meridian :
      0.0
      prime_meridian_name :
      Greenwich
      geographic_crs_name :
      WGS 84
      horizontal_datum_name :
      World Geodetic System 1984
      projected_crs_name :
      WGS 84 / UTM zone 32N
      grid_mapping_name :
      transverse_mercator
      latitude_of_projection_origin :
      0.0
      longitude_of_central_meridian :
      9.0
      false_easting :
      500000.0
      false_northing :
      0.0
      scale_factor_at_central_meridian :
      0.9996
      spatial_ref :
      PROJCS["WGS 84 / UTM zone 32N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32632"]]
      GeoTransform :
      358545.0 30.0 0.0 5556075.0 0.0 -30.0
      array(0)
    • x
      PandasIndex
      PandasIndex(Index([358560.0, 358590.0, 358620.0, 358650.0, 358680.0, 358710.0, 358740.0,
             358770.0, 358800.0, 358830.0,
             ...
             429420.0, 429450.0, 429480.0, 429510.0, 429540.0, 429570.0, 429600.0,
             429630.0, 429660.0, 429690.0],
            dtype='float64', name='x', length=2372))
    • y
      PandasIndex
      PandasIndex(Index([5556060.0, 5556030.0, 5556000.0, 5555970.0, 5555940.0, 5555910.0,
             5555880.0, 5555850.0, 5555820.0, 5555790.0,
             ...
             5475330.0, 5475300.0, 5475270.0, 5475240.0, 5475210.0, 5475180.0,
             5475150.0, 5475120.0, 5475090.0, 5475060.0],
            dtype='float64', name='y', length=2701))
  • AREA_OR_POINT :
    Area
    TIFFTAG_SOFTWARE :
    GRASS GIS 7.8.5 with GDAL 3.1.4
    _FillValue :
    nan
    scale_factor :
    1.0
    add_offset :
    0.0
In [ ]:
# Plot Catchments within Raster

if plots == "yes": 
    f, ax = plt.subplots()
    DEM.plot.imshow(ax=ax)

    shape_catchments.plot(ax=ax,
                        facecolor="none", 
                        linewidth=0.4)
    ax.set(title="DEM with Shapefile Overlayed")


    ax.set_axis_off()
    plt.show()
In [ ]:
# Clip raster by catchment vectors
DEM_clip = DEM.rio.clip(shape_catchments.geometry, all_touched = True)

# Resample rad raster big to same resolution (30x30m)
# Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
xds_match = LS_stack
xds = DEM_clip
xds_repr_match = xds.rio.reproject_match(xds_match)

xds_repr_match = xds_repr_match.assign_coords({
    "x": xds_match.x,
    "y": xds_match.y,
})

DEM = xds_repr_match

2.6.2 RL_in¶

In [ ]:
# Incoming Longwave Radiation
# typical Range: 200-500 W/m²
# T_cold = np.nanmin(LS_stack.band_10.values)              # Assuming Tcold is 290 Kelvin, based on band_10 Histogram
Ta = T_cold              # Near Surface Air Temperature in Kelvin from T_cold (T Cold Pixel), since small H --> LST ~ Ta
sigma = 5.67*10**-8     # Boltzmann constant in W/m²/K**4

# Atmospheric transmissivity  --> May also possible to use LS_stack.AtmoTrans? But probably band specific 
z = DEM     # elevation above sea level (m) for Station (here: Bad Kreuznach elevation) 
tau_sw = 0.75+2*10**-5*z

emis_atmo = 0.85 * (-np.log(tau_sw))**0.09  # Empirical Formula for Atmospheric Emissivity # NOTE: READ on emis_atmo in env.physics page 244

RL_in = emis_atmo * sigma * Ta**4

if plots == "yes": 
    RL_in.plot()
In [ ]:
RL_in.plot()
Out[ ]:
<matplotlib.collections.QuadMesh at 0x25e1701f290>

2.7 Solving vor Rn¶

In [ ]:
# Solving for Net Surface Radiation Flux Rn
# typical Range: 100-700 W/m²

# Which RL_in to use: 
# 1. RL_in = normal SEBAL Formula  
# 2. RL_in_Landsat = with Landsat AtmoTransmissivity raster
# 3. RLin:book = with Atmospheric emissivity from ENvironmental Physics Book

Rn = (1-albedo)*RS_in + RL_in - RL_out - (1-emis_null)*RL_in  # After SEBAL Manual

# Other Formula based on Allen 2007 / M.L. Fischer Script
# Rn_allen = RS_in - (albedo*RS_in) + RL_in - RL_out - (1-emis_null)*RL_in

Rn = Rn.where(Rn > 0)

if plots == "yes": 
    # Plot the data
    f, ax=plt.subplots()
    Rn.plot.imshow(ax=ax,
                    cmap = "Greys")
    ax.set_axis_off()
    ax.set_title("Plot of Rn - Surface Net Radiation")
    plt.show()

3. Surface Energy Balance¶

3.1 G - Soil Heat Flux¶

In [ ]:
# Soil Heat Flux
G_Rn_ratio = ((Ts-273.16)/albedo * ((0.0038*albedo)+(0.0075*(albedo**2)))*(1-0.98*(NDVI**4)))
G = G_Rn_ratio*Rn


print("Min:", np.nanmin(G_Rn_ratio.values))
print("Max:", np.nanmax(G_Rn_ratio.values))

G_Rn_ratio_filt = G_Rn_ratio.where(G_Rn_ratio>0)
G_filt = G.where(G_Rn_ratio>0)
Min: -0.19423479
Max: 0.481108
In [ ]:
if plots == "yes": 

    # Plot the data
    f, ax=plt.subplots()
    G_Rn_ratio_filt.plot.imshow(ax=ax,
                    cmap = "Greys")
    ax.set_axis_off()
    ax.set_title("Plot of G/Rn")
    plt.show()
In [ ]:
if plots == "yes": 
    # Plot the data
    f, ax=plt.subplots()
    G_filt.plot.imshow(ax=ax,
                    cmap = "Greys")
    ax.set_axis_off()
    ax.set_title("Plot of G - Soil Heat Flux")
    plt.show()

3.2 H - Sensible Heat Flux¶

3.2.1 Aerodynamic Resistance r_ah¶

In [ ]:
if wind_method == "station":

    # OLD FORUMLAS WITH ASSUMED Wind speed and momentum roughness length 
    # r_ah for neutral stability
    # Final Formula: r_ah = (np.log(z2/z1)) / (u_asterix * k)
    # Values are just assumed for testing, later taken from actual weather data according to the station


    k = 0.41    # Karmans constant

    ux = wind_vec.wind.mean()    # Wind speed at height zx (m/s) - Mean from WInd Map (Test value was 20 m/s)
    zx = 2      # Height x with measured Wind speed ux
    h_veg = 0.3      # Vegetation Heigh in m at weather station
    zom_station = 0.12*h_veg   # momentum roughness length (m) at the weather station, SEBAL Manual suggests 0.12*height_vegetation

    # 1. Friction veolocity u_asterix --> logaritmic wind profile
    u_asterix = (k*ux) / (np.log(zx / zom_station))

    # 2. Wind speed at 200m above ground u200
    u200 = u_asterix * ((np.log(200/zom_station)) / k)

    # Treshold u200 adopted from R-Package "water" (Olmedo 2016) with 1 m/s
    if u200 < 1:
        print("u200 less than treshold value (1m/s), using 4m/s instead")
        u200 = 4.0 # m/s

    # 3. Friction Velocity u_asterix_px for each Pixel 
    zom_px =  zom_map          # Momentum roughness length (m) for each Pixel of the Image

    u_asterix_px = (k*u200) / (np.log(200 / zom_px))


    # if plots == "yes": 
    #     u_asterix_px.plot()
        
    # 4. Initial Aerodynamic resistance r_ah
    z1 = 0.1     # Height 1 for Wind Measurement (m), from SEBAL Manual
    z2 = 2       # Height 2 for Wind Measurement (m), from SEBAL Manual

    r_ah = (np.log(z2/z1)) / (u_asterix_px * k)

    if plots == "yes": 
        r_ah.plot()
        
        
    if plots == "yes":
        print("Initial r_ah at", "\n",
            "Hot Pixel:", r_ah.sel(x = hot_x, y = hot_y).values, "\n",
            "Cold Pixel:", r_ah.sel(x = cold_x, y = cold_y).values)    
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
Initial r_ah at 
 Hot Pixel: 162.02797 
 Cold Pixel: 112.1345
In [ ]:
if wind_method == "map":

    # NEW FORMULA TO CALCULATE with new spatial values of Wind Speed map and Land Use based zom, see Zhang_2011_SEBAL/MODIS
    # r_ah for neutral stability
    # Final Formula: r_ah = (np.log(z2/z1)) / (u_asterix * k)
    # Values are just assumed for testing, later taken from actual weather data according to the station
    k = 0.41    # Karmans constant

    # ux = wind_map    # Wind speed at height zx (m/s) 
    zx = 2     # Height x with measured Wind speed ux, assumed to be typical weather station height (2m)

    # 1. Friction veolocity u_asterix 
    u_asterix_px = (k*wind_map) / (np.log(zx / zom_map))


        
        
        
    # 2. and 3. can be skipped due to already existing wind speed at ground level

    # 4. Initial Aerodynamic resistance r_ah
    z1 = 0.1     # Height 1 for Wind Measurement (m), from SEBAL Manual
    z2 = 2       # Height 2 for Wind Measurement (m), from SEBAL Manual

    r_ah_windmap = (np.log(z2/z1)) / (u_asterix_px * k)

    if plots == "yes": 
        r_ah_windmap.plot()
        

    r_ah = r_ah_windmap

Fixed Iterative h Calculation¶

Probleme:
-r_ah stabilisiert sich nicht -CHECK
-Obukhov Länge wird -inf für Cold pixel, wo H = 0 wird - CHECK
FRAGE: WIRD H (hot und cold) im iterativen Prozess geupdated oder NICHT...
Check dafür SEBAL Manual, pySEBAL und R-Scripte: M.F. Sebal und "water" package calcH() --> habe H_hot NICHT aktualisiert, dann geht es

In [ ]:
# 11. Add all steps into for loop
# Outside Loop
#######################
# INITIAL COLD PIXEL

# Working with Penman-Monteith Reference ETr for cold pixel
# Rn_cold = Rn.sel(x = cold_x, y = cold_y).values
# G_cold = G.sel(x = cold_x, y = cold_y).values
# LE_cold = 1.05 Reference ET (e.g. et_pt.sel(x = cold_x, y = cold_y).values)
# H_cold = Rn_cold - G_cold - LE_cold
r_ah_cold = r_ah.sel(x = cold_x, y = cold_y).values
roh_cold = 1.293     # Air density (kg/m³), Assumed value, should be calculated based on height and T_air
cp = 1004           # Air specific heat (J/kg/K)
# dT_cold = H_cold * r_ah_cold / (roh_cold*cp) 

# Test with assumption that Hcold = 0 with zero sensible heat flux
H_cold = 0
dT_cold = 0

########################
# INITIAL HOT PIXEL
Rn_hot = Rn.sel(x = hot_x, y = hot_y).values
G_hot = G.sel(x = hot_x, y = hot_y).values
LE_hot = 0   # Assuming no rest moisture, thus no latent heat 
H_hot  = Rn_hot - G_hot - LE_hot
r_ah_hot = r_ah.sel(x = hot_x, y = hot_y).values
roh_hot = 1.293     # Air density (kg/m³), Assumed value, should be calculated based on height and T_air
                    # Method after M.L. Fischer: roh_hot = (P *100)/ ((LST - dT) * Rs)
cp = 1004           # Air specific heat (J/kg/K)
dT_hot = H_hot * r_ah_hot / (roh_hot*cp)

# Ts_nonan = Ts.where(((np.isnan(r_ah) == False)), drop = True)

# 5.1 Linear Regression on T against dT --> get coeffiients b and a
Ts_cold = T_cold
Ts_hot = T_hot

# Calculate Air density roh 
pressure = 101325  # Air Pressure in Pascal, assumed Value from Wikipedia
Rs_constant = 287.058   # Specific Gas constant in J/Kg*K
g = 9.81    # Gravitational constant, m/s²

# Create Data Frame to save updated values during iteration
cols = ["Iteration", "r_ah_hot", "dT_hot", "r_ah_mean", "r_ah_cold"]
df_stabcorr = pd.DataFrame(columns=cols, index = range(15))


# Inside Loop
for i in range(15):
    # 5. Compute H from dT (near surface temperature difference)
    # Compute H and dT for hot and cold anchor pixels
    
    # COLD PIXEL
    # H_cold = Rn_cold - G_cold - LE_cold
    # r_ah_cold = r_ah.sel(x = cold_x, y = cold_y).values
    # roh_cold = 1.293     # Air density (kg/m³), Assumed value, should be calculated based on height and T_air
    # cp = 1004           # Air specific heat (J/kg/K)
    # dT_cold = H_cold * r_ah_cold / (roh_cold*cp) 
    
    
    # HOT PIXEL --> already outsite the loop
    # Get r_ah for Hot Pixel after filtering r_ah non NAN
    
    
    # Or with updated H_hot from new H Map ???? (Not sure if this or initial H Value) / something like this:
    # H_hot = H.sel(x = hot_x, y = hot_y).values
    
    
    
    
    # 5.1 Linear Regression on T against dT --> get coeffiients b and a
    # Create Arrays for Linear Regression
    Ts_linreg = np.array([Ts_cold.item(), Ts_hot.item()]).reshape((-1, 1))
    dT_linreg = np.array([dT_cold, dT_hot.item()])

    # Build Linear Model
    dT_model = LinearRegression().fit(Ts_linreg, dT_linreg)

    # Extract Coefficients after dT = b + a*Ts
    coeff_b = dT_model.intercept_
    coeff_a = dT_model.coef_ 
    
    # 5.2 Calculate dT for each Pixel based on Ts and and Linear Model dT_model
    dT_pix = coeff_b + coeff_a*Ts    # or Ts_nonan
    
    # 6. Calculate Air Temperature Ta and Air density based on Ts and dT 
    Ta_pix = Ts - dT_pix

    roh_pix = pressure / (Rs_constant * Ta_pix)

    # 7. Calculate Sensible Heat Flux H with updated r_ah
    H = (roh_pix * cp * dT_pix) / r_ah
    # H = H.where(H != 0)
    # H = H.where(H > 0)
    
    # 8. Stability correction/iteration
    # Monin-Obukhov Length L_mo
    L_mo = -(roh_pix * cp * u_asterix_px**3 * Ts) / (k * g * H)
    
    # Filter -inf values
    # L_mo = L_mo.where(((L_mo < 1000) & (L_mo > -50000)), drop = True)
    #Filter after PySEBAL 
    # L_mo = L_mo.where((L_mo > -1000))
    
    # Coefficients for Corrections functions below
    x_200 = (1-(16*200/L_mo))**0.25
    x_2 = (1-(16*2/L_mo))**0.25
    x_01 = (1-(16*0.1/L_mo))**0.25

    #####################################
    # Corections for momentum psi_m and heat transport psi_h 
    #####################################
    # 1. Correction for psi_m_200
    #####################################
    # if L_mo < 0
    psi_m_200_neg = (2*np.log((1+x_200)/ 2)) + np.log((1+x_200**2)/2) - 2*np.arctan(x_200) + 0.5*math.pi
    # if L_mo > 0
    psi_m_200_pos = -5*(2/L_mo)

    psi_m_200 = L_mo    
    psi_m_200 = psi_m_200.where(((L_mo > 0) & (np.isnan(L_mo) == False)), psi_m_200_neg) 
    psi_m_200 = psi_m_200.where(((L_mo != 0) & (np.isnan(L_mo) == False)), 0)
    psi_m_200 = psi_m_200.where(((L_mo < 0) & (np.isnan(L_mo) == False)), psi_m_200_pos) 

    #####################################
    # 2. Correction for psi_h_2
    #####################################
    # if L_mo < 0
    psi_h_2_neg = 2*np.log((1+x_2**2)/2)
    # if L_mo > 0
    psi_h_2_pos = -5*(2/L_mo)

    psi_h_2 = L_mo    
    psi_h_2 = psi_h_2.where(((L_mo > 0) & (np.isnan(L_mo) == False)), psi_h_2_neg) 
    psi_h_2 = psi_h_2.where(((L_mo != 0) & (np.isnan(L_mo) == False)), 0)
    psi_h_2 = psi_h_2.where(((L_mo < 0) & (np.isnan(L_mo) == False)), psi_h_2_pos) 
    
    #####################################
    # 3. Correction for psi_h_01
    #####################################
    # if L_mo < 0
    psi_h_01_neg = 2*np.log((1+x_01**2)/2)
    # if L_mo > 0
    psi_h_01_pos = -5*(0.1/L_mo)

    psi_h_01 = L_mo    
    psi_h_01 = psi_h_01.where(((L_mo > 0) & (np.isnan(L_mo) == False)), psi_h_01_neg) 
    psi_h_01 = psi_h_01.where(((L_mo != 0) & (np.isnan(L_mo) == False)), 0)
    psi_h_01 = psi_h_01.where(((L_mo < 0) & (np.isnan(L_mo) == False)), psi_h_01_pos) 
    
    #####################################
    
    # 9. UPDATE u and r_ah: Calculate corrected Friction Velocity u_asterix
    u_asterix_px = (k*u200) / (np.log(200/zom_map) - psi_m_200)
    
    # 10. UPDATE u and r_ah: Corrected Areodynamic Resistance r_ah is calculated
    r_ah = (np.log(z2/z1) - psi_h_2 + psi_h_01) / (u_asterix_px * k)
    
    # 11. NEW FROM 15.11.2023 before EC: Update dT_hot with initial (not updated) H_hot but updated r_ah
    r_ah_hot = r_ah.sel(x = hot_x, y = hot_y).values
    dT_hot = H_hot * r_ah_hot / (roh_hot*cp)
    
    # 11. UPDATE dT for Hot and Cold Pixels
    # COLD
    r_ah_cold = r_ah.sel(x = cold_x, y = cold_y).values
    # H_cold = H.sel(x = cold_x, y = cold_y).values
    # # H_cold = 0
    dT_cold = H_cold * r_ah_cold / (roh_cold*cp)
    
    # # HOT
    # r_ah_hot = r_ah.sel(x = hot_x, y = hot_y).values
    # H_hot = H.sel(x = hot_x, y = hot_y).values
    # dT_hot = H_hot * r_ah_hot / (roh_hot*cp)
    
    # Save values to Stability Correction DataFrame
    df_stabcorr.loc[i].Iteration = i
    df_stabcorr.loc[i].r_ah_hot = r_ah_hot
    df_stabcorr.loc[i].dT_hot = dT_hot
    df_stabcorr.loc[i].r_ah_mean = r_ah.mean() 
    df_stabcorr.loc[i].r_ah_cold = r_ah_cold
    
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
c:\Users\nicol\anaconda3\envs\ET_master_project\Lib\site-packages\xarray\core\computation.py:821: RuntimeWarning: invalid value encountered in log
  result_data = func(*input_data)
In [ ]:
# Plot Change in Stability correction values
plt.plot(df_stabcorr.r_ah_hot, label = df_stabcorr.columns[1])
plt.plot(df_stabcorr.dT_hot, label = df_stabcorr.columns[2])
plt.plot(df_stabcorr.r_ah_mean, label = df_stabcorr.columns[3])
plt.plot(df_stabcorr.r_ah_cold, label = df_stabcorr.columns[4])
plt.legend()
plt.show()
In [ ]:
H.plot.hist(bins = 50, alpha = 0.5, color = "blue")
plt.show()
In [ ]:
H.plot()
Out[ ]:
<matplotlib.collections.QuadMesh at 0x25e1749cf10>

3.3 ET - Latent Heat Flux¶

In [ ]:
# Calculation of ET 
ETL = Rn - G_filt - H
ETL = ETL.where(ETL>=0)
In [ ]:
ETL.plot.hist()
plt.show()
In [ ]:
# Plot the data
f, ax=plt.subplots()
ETL.plot.imshow(ax=ax,
               cmap = "viridis_r")
ax.set_axis_off()
ax.set_title("")
plt.show()
In [ ]:
# From ET_lambda to ET in mm/hour
lamda = 2.25 * 10**6 # Latet heat of Vaporization J/kg
ET_inst = 3600* (ETL / lamda)

ET_inst.plot.hist()
Out[ ]:
(array([1.87000e+02, 7.46000e+02, 5.27600e+03, 3.79450e+04, 1.25805e+05,
        1.80809e+05, 2.00003e+05, 1.27423e+05, 2.80430e+04, 3.00400e+03]),
 array([0.00312311, 0.0821575 , 0.16119189, 0.24022628, 0.31926067,
        0.39829506, 0.47732945, 0.55636384, 0.63539823, 0.71443262,
        0.79346701]),
 <BarContainer object of 10 artists>)
In [ ]:
# Plot the data
f, ax=plt.subplots()
ET_inst.plot.imshow(ax=ax,
               cmap = "viridis_r")
ax.set_axis_off()
ax.set_title("")
plt.show()

Daily ETa Sum (with assumed constant ET-fraction)¶

In [ ]:
# Evaporative Fraction, assumed to be constant during the day
ETf = ETL / (H + ETL)
#ETf = ETf.where((ETf > 0) & (ETf < 1))
In [ ]:
# Plot the data
f, ax=plt.subplots()
ETf.plot.imshow(ax=ax,
               cmap = "viridis_r")
ax.set_axis_off()
ax.set_title("")
plt.show()

3.2.0 Reference ET - Import FAO56 ET0 Rasters¶

In [ ]:
# Import Daily ETp Rasters and Resample them

# Read Rasters
path_etp: str = path_data + "Original/meteo_raster_daily/"
path_etp_alten: str = path_etp + "ETp_FAO-56_Alten/ETp-FAO-56-Alten" + date_etp + ".tif"
path_etp_kellen: str = path_etp + "ETp_FAO-56_Kelle/ETp-FAO-56-kelle" + date_etp + ".tif"

etp_alten = rxr.open_rasterio(path_etp_alten, masked = True).squeeze()
etp_alten.rio.write_crs("epsg:31466", inplace=True)

etp_kellen = rxr.open_rasterio(path_etp_kellen, masked = True).squeeze()
etp_kellen.rio.write_crs("epsg:31466", inplace=True)


# Merge rasters
etp_mosaic = merge_arrays(dataarrays = [etp_alten, etp_kellen], nodata = np.nan)

#LU_mosaic = LU_mosaic .rename({"x": "long", "y": "lat"})
#LU_mosaic .rio.write_crs("epsg:32632", inplace=True)

# Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
xds_match = LS_stack
xds = etp_mosaic
xds_repr_match = xds.rio.reproject_match(xds_match)

xds_repr_match = xds_repr_match.assign_coords({
    "x": xds_match.x,
    "y": xds_match.y,
})

etp_mosaic = xds_repr_match
In [ ]:
etp_mosaic.plot()
Out[ ]:
<matplotlib.collections.QuadMesh at 0x25d99505150>
In [ ]:
eta_day = ETf * etp_mosaic
In [ ]:
eta_day.plot()
Out[ ]:
<matplotlib.collections.QuadMesh at 0x25d82dce390>
In [ ]:
plt.scatter(x = eta_day, y = ET_inst)
Out[ ]:
<matplotlib.collections.PathCollection at 0x25d82e2b150>
In [ ]:
# Export ETa_day Raster 
export_path = path_data + "Processed/export/eta_day/ETa_day_" + date_SSEB + ".tif"
raster = eta_day  # xarray Array to be exported

raster.rio.to_raster(export_path)

Comparison with SSEB¶

In [ ]:
# DATA FROM SSEB

# Date in SSEB-Raster Path Format with time (hour)


# Create Path for recursive file search based on defined date
path_SSEB = path_data + "Original/SSEB/"
pattern = f"*{date_SSEB}*.tif"
paths_SSEB = sorted(glob.glob(os.path.join(path_SSEB, "**", pattern), recursive=True))
print(paths_SSEB)

# Combine both Rasters
raster_0 = rxr.open_rasterio(paths_SSEB[0]).squeeze()
raster_1 = rxr.open_rasterio(paths_SSEB[1]).squeeze()
SSEB_raster = merge_arrays(dataarrays = [raster_0, raster_1], nodata = np.nan)
SSEB_raster.rio.write_crs("epsg:32632", inplace=True)

# Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
xds_match = LS_stack
xds = SSEB_raster
xds_repr_match = xds.rio.reproject_match(xds_match)

xds_repr_match = xds_repr_match.assign_coords({
    "x": xds_match.x,
    "y": xds_match.y,
})

SSEB_raster = xds_repr_match
['D:/Nicolas_D/Geodaten/Masterarbeit/DATA_MesoHyd_MA-SEBAL/Original/SSEB\\SSEB_ETa_Landsat-8_alten_2015_07_04_50m.tif', 'D:/Nicolas_D/Geodaten/Masterarbeit/DATA_MesoHyd_MA-SEBAL/Original/SSEB\\SSEB_ETa_Landsat-8_kelle_2015_07_04_50m.tif']
In [ ]:
SSEB_raster.plot()
Out[ ]:
<matplotlib.collections.QuadMesh at 0x25d54ba3890>
In [ ]:
plt.scatter(x = eta_day, y = SSEB_raster)
Out[ ]:
<matplotlib.collections.PathCollection at 0x25d54b6c750>
In [ ]:
# Difference raster
diff = eta_day - SSEB_raster
diff.plot()
Out[ ]:
<matplotlib.collections.QuadMesh at 0x25d54acbad0>

Comparison with EEFlux¶

In [ ]:
# DATA FROM EEFlux

# Date in EEflux-Raster Path Format with time (hour)
date_eeflux = date_SSEB

# Create Path for recursive file search based on defined date
path_dir = path_data + "Original/EEFlux/"
pattern = f"*{date_eeflux}*.tif"
path_eeflux = sorted(glob.glob(os.path.join(path_dir, "**", pattern), recursive=True))
print(path_eeflux)

# Combine both Rasters
eeflux = rxr.open_rasterio(path_eeflux[0]).squeeze()
eeflux .rio.write_crs("epsg:32632", inplace=True)
['D:/Nicolas_D/Geodaten/Masterarbeit/DATA_MesoHyd_MA-SEBAL/Original/EEFlux\\eeflux_2015_07_04.tif']
Out[ ]:
<xarray.DataArray (y: 8081, x: 7981)>
[64494461 values with dtype=float64]
Coordinates:
    band         int32 1
  * x            (x) float64 2.793e+05 2.793e+05 ... 5.187e+05 5.187e+05
  * y            (y) float64 5.692e+06 5.692e+06 ... 5.449e+06 5.449e+06
    spatial_ref  int32 0
Attributes:
    AREA_OR_POINT:  Area
    scale_factor:   1.0
    add_offset:     0.0
    long_name:      eta
xarray.DataArray
  • y: 8081
  • x: 7981
  • ...
    [64494461 values with dtype=float64]
    • band
      ()
      int32
      1
      array(1)
    • x
      (x)
      float64
      2.793e+05 2.793e+05 ... 5.187e+05
      array([279300., 279330., 279360., ..., 518640., 518670., 518700.])
    • y
      (y)
      float64
      5.692e+06 5.692e+06 ... 5.449e+06
      array([5691600., 5691570., 5691540., ..., 5449260., 5449230., 5449200.])
    • spatial_ref
      ()
      int32
      0
      crs_wkt :
      PROJCS["WGS 84 / UTM zone 32N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32632"]]
      semi_major_axis :
      6378137.0
      semi_minor_axis :
      6356752.314245179
      inverse_flattening :
      298.257223563
      reference_ellipsoid_name :
      WGS 84
      longitude_of_prime_meridian :
      0.0
      prime_meridian_name :
      Greenwich
      geographic_crs_name :
      WGS 84
      horizontal_datum_name :
      World Geodetic System 1984
      projected_crs_name :
      WGS 84 / UTM zone 32N
      grid_mapping_name :
      transverse_mercator
      latitude_of_projection_origin :
      0.0
      longitude_of_central_meridian :
      9.0
      false_easting :
      500000.0
      false_northing :
      0.0
      scale_factor_at_central_meridian :
      0.9996
      spatial_ref :
      PROJCS["WGS 84 / UTM zone 32N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32632"]]
      GeoTransform :
      279285.0 30.0 0.0 5691615.0 0.0 -30.0
      array(0)
    • x
      PandasIndex
      PandasIndex(Index([279300.0, 279330.0, 279360.0, 279390.0, 279420.0, 279450.0, 279480.0,
             279510.0, 279540.0, 279570.0,
             ...
             518430.0, 518460.0, 518490.0, 518520.0, 518550.0, 518580.0, 518610.0,
             518640.0, 518670.0, 518700.0],
            dtype='float64', name='x', length=7981))
    • y
      PandasIndex
      PandasIndex(Index([5691600.0, 5691570.0, 5691540.0, 5691510.0, 5691480.0, 5691450.0,
             5691420.0, 5691390.0, 5691360.0, 5691330.0,
             ...
             5449470.0, 5449440.0, 5449410.0, 5449380.0, 5449350.0, 5449320.0,
             5449290.0, 5449260.0, 5449230.0, 5449200.0],
            dtype='float64', name='y', length=8081))
  • AREA_OR_POINT :
    Area
    scale_factor :
    1.0
    add_offset :
    0.0
    long_name :
    eta
In [ ]:
# Plot Catchments within Raster

if plots == "yes": 
    f, ax = plt.subplots()
    eeflux.plot.imshow(ax=ax)

    shape_catchments.plot(ax=ax,
                        facecolor="none", 
                        linewidth=0.4)
    ax.set(title="DEM with Shapefile Overlayed")


    ax.set_axis_off()
    plt.show()
In [ ]:
# Clip raster by catchment vectors
eeflux_clip = eeflux.rio.clip(shape_catchments.geometry, all_touched = True)

# Resample rad raster big to same resolution (30x30m)
# Using reproject_match https://corteva.github.io/rioxarray/html/examples/reproject_match.html
xds_match = LS_stack
xds = eeflux_clip
xds_repr_match = xds.rio.reproject_match(xds_match)

xds_repr_match = xds_repr_match.assign_coords({
    "x": xds_match.x,
    "y": xds_match.y,
})

eeflux = xds_repr_match
In [ ]:
eeflux
Out[ ]:
<xarray.DataArray (y: 2367, x: 1369)>
array([[nan, nan, nan, ..., nan, nan, nan],
       [nan, nan, nan, ..., nan, nan, nan],
       [nan, nan, nan, ..., nan, nan, nan],
       ...,
       [nan, nan, nan, ..., nan, nan, nan],
       [nan, nan, nan, ..., nan, nan, nan],
       [nan, nan, nan, ..., nan, nan, nan]])
Coordinates:
    band         int32 1
    spatial_ref  int32 0
  * x            (x) float64 3.836e+05 3.837e+05 ... 4.246e+05 4.247e+05
  * y            (y) float64 5.551e+06 5.551e+06 5.551e+06 ... 5.48e+06 5.48e+06
Attributes:
    AREA_OR_POINT:  Area
    scale_factor:   1.0
    add_offset:     0.0
    long_name:      eta
    _FillValue:     1.7976931348623157e+308
xarray.DataArray
  • y: 2367
  • x: 1369
  • nan nan nan nan nan nan nan nan ... nan nan nan nan nan nan nan nan
    array([[nan, nan, nan, ..., nan, nan, nan],
           [nan, nan, nan, ..., nan, nan, nan],
           [nan, nan, nan, ..., nan, nan, nan],
           ...,
           [nan, nan, nan, ..., nan, nan, nan],
           [nan, nan, nan, ..., nan, nan, nan],
           [nan, nan, nan, ..., nan, nan, nan]])
    • band
      ()
      int32
      1
      array(1)
    • spatial_ref
      ()
      int32
      0
      crs_wkt :
      PROJCS["WGS 84 / UTM zone 32N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32632"]]
      semi_major_axis :
      6378137.0
      semi_minor_axis :
      6356752.314245179
      inverse_flattening :
      298.257223563
      reference_ellipsoid_name :
      WGS 84
      longitude_of_prime_meridian :
      0.0
      prime_meridian_name :
      Greenwich
      geographic_crs_name :
      WGS 84
      horizontal_datum_name :
      World Geodetic System 1984
      projected_crs_name :
      WGS 84 / UTM zone 32N
      grid_mapping_name :
      transverse_mercator
      latitude_of_projection_origin :
      0.0
      longitude_of_central_meridian :
      9.0
      false_easting :
      500000.0
      false_northing :
      0.0
      scale_factor_at_central_meridian :
      0.9996
      spatial_ref :
      PROJCS["WGS 84 / UTM zone 32N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32632"]]
      GeoTransform :
      383625.0 30.0 0.0 5551065.0 0.0 -30.0
      array(0)
    • x
      (x)
      float64
      3.836e+05 3.837e+05 ... 4.247e+05
      axis :
      X
      long_name :
      x coordinate of projection
      standard_name :
      projection_x_coordinate
      units :
      metre
      array([383640., 383670., 383700., ..., 424620., 424650., 424680.])
    • y
      (y)
      float64
      5.551e+06 5.551e+06 ... 5.48e+06
      axis :
      Y
      long_name :
      y coordinate of projection
      standard_name :
      projection_y_coordinate
      units :
      metre
      array([5551050., 5551020., 5550990., ..., 5480130., 5480100., 5480070.])
    • x
      PandasIndex
      PandasIndex(Index([383640.0, 383670.0, 383700.0, 383730.0, 383760.0, 383790.0, 383820.0,
             383850.0, 383880.0, 383910.0,
             ...
             424410.0, 424440.0, 424470.0, 424500.0, 424530.0, 424560.0, 424590.0,
             424620.0, 424650.0, 424680.0],
            dtype='float64', name='x', length=1369))
    • y
      PandasIndex
      PandasIndex(Index([5551050.0, 5551020.0, 5550990.0, 5550960.0, 5550930.0, 5550900.0,
             5550870.0, 5550840.0, 5550810.0, 5550780.0,
             ...
             5480340.0, 5480310.0, 5480280.0, 5480250.0, 5480220.0, 5480190.0,
             5480160.0, 5480130.0, 5480100.0, 5480070.0],
            dtype='float64', name='y', length=2367))
  • AREA_OR_POINT :
    Area
    scale_factor :
    1.0
    add_offset :
    0.0
    long_name :
    eta
    _FillValue :
    1.7976931348623157e+308
In [ ]:
eeflux.plot()
Out[ ]:
<matplotlib.collections.QuadMesh at 0x25d54afa850>
In [ ]:
plt.scatter(x = eta_day, y = eeflux)
Out[ ]:
<matplotlib.collections.PathCollection at 0x25d99635a10>